import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow.keras import layers, models, losses
import matplotlib.pyplot as plt
import os
2024-07-24 21:05:04.052829: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered 2024-07-24 21:05:04.052935: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered 2024-07-24 21:05:04.210948: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
# Sanity check: list the contents of the mounted Kaggle dataset root.
os.listdir("/kaggle/input/face-expressions")
['FaceExpressions']
# Locations of the image tree and the path/label index on the Kaggle filesystem.
data_path = r'/kaggle/input/face-expressions/FaceExpressions/data.csv'
base_dir = r'/kaggle/input/face-expressions/FaceExpressions/dataset'

# Read the index CSV (columns: path, label) and peek at the first rows.
data = pd.read_csv(data_path)
print(data.head())
Unnamed: 0 path label 0 0 Surprise/1bd930d6a1c717c11be33db74823f661cb53f... Surprise 1 1 Surprise/cropped_emotions.100096~12fffff.png Surprise 2 2 Surprise/0df0e470e33093f5b72a8197fa209d684032c... Surprise 3 3 Surprise/cropped_emotions.260779~12fffff.png Surprise 4 4 Surprise/cropped_emotions.263616~12fffff.png Surprise
# Build the absolute path of one sample image (row 2 of the index) and echo it.
img_path = r'/kaggle/input/face-expressions/FaceExpressions/dataset/'+data['path'][2]
img_path
'/kaggle/input/face-expressions/FaceExpressions/dataset/Surprise/0df0e470e33093f5b72a8197fa209d684032cc17f68194e605a26897~12fffff.jpg'
import cv2
# Read the sample image; OpenCV returns a BGR uint8 array (see the
# BGR->RGB conversion below before displaying with matplotlib).
img = cv2.imread(img_path)
img
array([[[106, 48, 197],
[105, 47, 196],
[104, 46, 195],
...,
[117, 59, 208],
[118, 60, 209],
[118, 60, 209]],
[[103, 45, 194],
[103, 45, 194],
[102, 44, 193],
...,
[118, 60, 209],
[117, 59, 208],
[117, 59, 208]],
[[103, 45, 194],
[102, 44, 193],
[102, 44, 193],
...,
[118, 60, 209],
[117, 59, 208],
[116, 58, 207]],
...,
[[ 3, 3, 21],
[ 1, 1, 17],
[ 0, 1, 15],
...,
[ 2, 2, 2],
[ 0, 1, 2],
[ 0, 2, 5]],
[[ 3, 2, 18],
[ 1, 0, 16],
[ 0, 0, 14],
...,
[ 1, 1, 1],
[ 0, 1, 2],
[ 0, 1, 2]],
[[ 2, 1, 17],
[ 0, 0, 14],
[ 0, 0, 13],
...,
[ 1, 1, 1],
[ 0, 1, 2],
[ 0, 1, 2]]], dtype=uint8)
# Display the raw BGR array directly — channels look swapped in matplotlib.
plt.imshow(img)
<matplotlib.image.AxesImage at 0x7b69672283a0>
# Confirm the sample's dimensions: (height, width, channels).
img.shape
(204, 204, 3)
# Convert BGR -> RGB so matplotlib renders the true colors.
img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
plt.imshow(img_rgb)
<matplotlib.image.AxesImage at 0x7b695c6c4580>
def load_and_process_image(image_path):
    """Load an image as grayscale, resize to 128x128, and scale to [0, 1].

    Parameters
    ----------
    image_path : str
        Path to the image file on disk.

    Returns
    -------
    numpy.ndarray of shape (128, 128, 1) with float values in [0, 1],
    or None when the file cannot be read.
    """
    img = cv2.imread(image_path)
    if img is None:
        # cv2.imread signals failure by returning None, not by raising.
        return None
    # NOTE: the original variable was named `img_rgb`, but this converts to
    # GRAYSCALE — the VAE downstream expects a single channel.
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    resized = cv2.resize(gray, (128, 128))
    # Add the channel axis and normalize uint8 [0, 255] -> float [0, 1].
    return resized.reshape((128, 128, 1)) / 255.0
# Build parallel lists of processed images and labels, skipping any file
# OpenCV fails to read (load_and_process_image returns None for those).
images, labels = [], []
for rel_path, label in zip(data['path'], data['label']):
    processed = load_and_process_image(os.path.join(base_dir, rel_path))
    if processed is None:
        continue
    images.append(processed)
    labels.append(label)

# Collect everything into a single DataFrame and sanity-check it.
df = pd.DataFrame({'image': images, 'label': labels})
print(df.head())
image label 0 [[[0.9254901960784314], [0.9254901960784314], ... Surprise 1 [[[0.29411764705882354], [0.27450980392156865]... Surprise 2 [[[0.3843137254901961], [0.3764705882352941], ... Surprise 3 [[[0.7450980392156863], [0.7450980392156863], ... Surprise 4 [[[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0... Surprise
# Display sample images to verify
# Spot-check one processed sample (index 1000): print its label, render the
# grayscale image, and confirm the expected (128, 128, 1) shape.
print(df['label'][1000])
plt.imshow(df['image'][1000])
plt.axis('off')
plt.show()
df['image'][1000].shape
Surprise
(128, 128, 1)
# Spot-check another sample (index 2000).
print(df['label'][2000])
plt.imshow(df['image'][2000])
plt.axis('off')
plt.show()
df['image'][2000].shape
Sad
(128, 128, 1)
# Spot-check another sample (index 6000).
print(df['label'][6000])
plt.imshow(df['image'][6000])
plt.axis('off')
plt.show()
df['image'][6000].shape
Ahegao
(128, 128, 1)
# Spot-check another sample (index 7000).
print(df['label'][7000])
plt.imshow(df['image'][7000])
plt.axis('off')
plt.show()
df['image'][7000].shape
Happy
(128, 128, 1)
# Spot-check another sample (index 11000).
print(df['label'][11000])
plt.imshow(df['image'][11000])
plt.axis('off')
plt.show()
df['image'][11000].shape
Neutral
(128, 128, 1)
# Spot-check another sample (index 15000).
print(df['label'][15000])
plt.imshow(df['image'][15000])
plt.axis('off')
plt.show()
df['image'][15000].shape
Angry
(128, 128, 1)
# Check the class distribution of the labels — the dataset is imbalanced
# (majority classes have roughly 3-4x the samples of the rarest ones).
label_counts = df['label'].value_counts()
print(label_counts)
label Neutral 4027 Sad 3934 Happy 3740 Angry 1313 Surprise 1234 Ahegao 1205 Name: count, dtype: int64
from sklearn.model_selection import train_test_split

# Balance the classes by down-sampling every label to the size of the
# rarest class (capped at 4000 samples per label).
min_samples_per_label = df.groupby('label').size().min()
print(min_samples_per_label)
n_samples_per_label = min(min_samples_per_label, 4000)

# GroupBy.sample replaces groupby(...).apply(lambda x: x.sample(...)),
# which raises a DeprecationWarning on recent pandas because apply()
# operates on the grouping column; sample() keeps the label column intact.
sampled_df = (
    df.groupby('label')
    .sample(n=n_samples_per_label, random_state=42)
    .reset_index(drop=True)
)

# Stratified split: 5500 training / 1500 test rows with the same label mix.
train_df, test_df = train_test_split(
    sampled_df,
    train_size=5500,
    test_size=1500,
    stratify=sampled_df['label'],
    random_state=42,
)
1205
/tmp/ipykernel_34/1163303.py:6: DeprecationWarning: DataFrameGroupBy.apply operated on the grouping columns. This behavior is deprecated, and in a future version of pandas the grouping columns will be excluded from the operation. Either pass `include_groups=False` to exclude the groupings or explicitly select the grouping columns after groupby to silence this warning.
sampled_df = df.groupby('label').apply(lambda x: x.sample(n=n_samples_per_label, random_state=42)).reset_index(drop=True)
# Checking the class distribution of training and testing data —
# verifies that the stratified split kept the classes balanced.
print("Train set label distribution:")
print(train_df['label'].value_counts())
print("Test set label distribution:")
print(test_df['label'].value_counts())
Train set label distribution: label Angry 917 Ahegao 917 Surprise 917 Happy 917 Neutral 916 Sad 916 Name: count, dtype: int64 Test set label distribution: label Neutral 250 Surprise 250 Sad 250 Ahegao 250 Angry 250 Happy 250 Name: count, dtype: int64
# Pull the image arrays and label strings out of the DataFrames as plain
# Python lists so they can be fed to tf.data below.
train_images = list(train_df['image'])
train_labels = list(train_df['label'])
test_images = list(test_df['image'])
test_labels = list(test_df['label'])

# Eyeball the first training image.
plt.imshow(train_images[0])
<matplotlib.image.AxesImage at 0x7b694aa40400>
batch_size=16
# For convolutional VAE (non-flattened images)
# Batch the image lists into tf.data pipelines; labels are not needed
# because the VAE trains unsupervised on the images alone.
conv_train_dataset=(tf.data.Dataset.from_tensor_slices(train_images).batch(batch_size))
conv_test_dataset=(tf.data.Dataset.from_tensor_slices(test_images).batch(batch_size))
from keras.layers import LeakyReLU
class CVAE(tf.keras.Model):
    """Convolutional variational autoencoder for 128x128 grayscale images.

    The encoder maps an image to the mean and log-variance of a
    ``latent_dim``-dimensional diagonal Gaussian; the decoder maps a latent
    vector back to an image in [0, 1] (its last layer is sigmoid-activated).
    """

    def __init__(self, latent_dim):
        super().__init__()
        self.latent_dim = latent_dim
        # Encoder: two strided convolutions, then a dense layer emitting the
        # concatenated (mean, logvar) pair -> 2 * latent_dim units.
        # `shape=` replaces the `input_shape=` argument, which is deprecated
        # in current Keras (it triggered a UserWarning at model creation).
        self.encoder = tf.keras.Sequential(
            [
                tf.keras.layers.InputLayer(shape=(128, 128, 1)),
                tf.keras.layers.Conv2D(filters=32, kernel_size=3, strides=(2, 2), activation='leaky_relu'),
                tf.keras.layers.Conv2D(filters=64, kernel_size=3, strides=(2, 2), activation='leaky_relu'),
                tf.keras.layers.Flatten(),
                tf.keras.layers.Dense(latent_dim + latent_dim),
            ]
        )
        # Decoder: dense projection to a 32x32x128 feature map, two
        # upsampling transposed convolutions back to 128x128, and a final
        # sigmoid layer so the output is already a probability image.
        self.decoder = tf.keras.Sequential(
            [
                tf.keras.layers.InputLayer(shape=(latent_dim,)),
                tf.keras.layers.Dense(units=32 * 32 * 128, activation=tf.nn.leaky_relu),
                tf.keras.layers.Reshape(target_shape=(32, 32, 128)),
                tf.keras.layers.Conv2DTranspose(filters=128, kernel_size=3, strides=2, padding='same', activation='leaky_relu'),
                tf.keras.layers.Conv2DTranspose(filters=64, kernel_size=3, strides=2, padding='same', activation='leaky_relu'),
                tf.keras.layers.Conv2DTranspose(filters=32, kernel_size=3, strides=1, padding='same', activation='leaky_relu'),
                tf.keras.layers.Conv2DTranspose(filters=1, kernel_size=3, strides=1, padding='same', activation='sigmoid'),
            ]
        )

    @tf.function
    def sample(self, eps=None):
        """Decode latent vectors into images; draws 100 random latents when eps is None."""
        if eps is None:
            eps = tf.random.normal(shape=(100, self.latent_dim))
        return self.decode(eps, apply_sigmoid=True)

    def encode(self, x):
        """Return (mean, logvar) of the approximate posterior q(z|x)."""
        mean, logvar = tf.split(self.encoder(x), num_or_size_splits=2, axis=1)
        return mean, logvar

    def reparameterize(self, mean, logvar):
        """Sample z = mean + sigma * eps with eps ~ N(0, I)."""
        # tf.shape keeps this graph-safe when the batch dimension is dynamic
        # (mean.shape can contain None inside a tf.function trace).
        eps = tf.random.normal(shape=tf.shape(mean))
        return eps * tf.exp(logvar * .5) + mean

    def decode(self, z, apply_sigmoid=False):
        """Map latent vectors to images in [0, 1].

        BUG FIX: the decoder's final layer already applies a sigmoid, so the
        previous ``apply_sigmoid=True`` branch squashed the output through
        tf.sigmoid a SECOND time, compressing generated samples toward the
        [0.5, 0.73] range. The flag is kept for interface compatibility but
        no longer re-applies the sigmoid.
        """
        return self.decoder(z)

    def call(self, x):
        """Full forward pass: encode, reparameterize, decode."""
        mean, logvar = self.encode(x)
        z = self.reparameterize(mean, logvar)
        x_logit = self.decode(z)
        return x_logit, mean, logvar
# Adam with a small learning rate (5e-4) for the VAE.
optimizer = tf.keras.optimizers.Adam(0.0005)
# Log-density of `sample` under a diagonal Gaussian N(mean, exp(logvar)),
# summed over `raxis`. NOTE(review): not called anywhere in the visible
# code — compute_loss below uses an analytic KL term instead.
def log_normal_pdf(sample, mean, logvar, raxis=1):
    log2pi = tf.math.log(2. * np.pi)
    return tf.reduce_sum(
        -.5 * ((sample - mean) ** 2. * tf.exp(-logvar) + logvar + log2pi),
        axis=raxis)
def compute_loss(model, x):
    """Return the pair (reconstruction_loss, kl_loss) for a batch ``x``.

    reconstruction_loss: mean binary cross-entropy between the input batch
    and its decoded reconstruction.
    kl_loss: analytic KL divergence between the approximate posterior
    N(z_mean, exp(z_log_var)) and the standard normal prior, averaged
    over the batch and latent dimensions.
    """
    reconstruction, z_mean, z_log_var = model(x)
    # computing the reconstruction loss
    reconstruction_loss = tf.reduce_mean(tf.keras.losses.binary_crossentropy(x, reconstruction))
    # computing the KL divergence loss
    kl_loss = -0.5 * tf.reduce_mean(1 + z_log_var - tf.square(z_mean) - tf.exp(z_log_var))
    return reconstruction_loss, kl_loss
@tf.function
def train_step(model, x, optimizer):
    """Run one gradient step of the VAE on a single batch.

    The total loss is the sum of the reconstruction and KL terms. Summing
    explicitly replaces the previous implicit behavior, where the tuple
    returned by compute_loss was passed to tape.gradient(), which sums the
    gradients of multiple targets — same result, but now stated in code.
    """
    # Images arrive as float64 numpy arrays; the model runs in float32.
    x = tf.cast(x, dtype=tf.float32)
    with tf.GradientTape() as tape:
        reconstruction_loss, kl_loss = compute_loss(model, x)
        loss = reconstruction_loss + kl_loss
    gradients = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))
# Training hyperparameters.
epochs = 200
latent_dim = 300
num_examples_to_generate = 16
# keeping the random vector constant for generation (prediction) so
# it will be easier to see the improvement.
random_vector_for_generation = tf.random.normal(
    shape=[num_examples_to_generate, latent_dim])
# Instantiate the convolutional VAE defined above.
model_1_1 = CVAE(latent_dim)
/opt/conda/lib/python3.10/site-packages/keras/src/layers/core/input_layer.py:26: UserWarning: Argument `input_shape` is deprecated. Use `shape` instead. warnings.warn(
def generate_and_save_images(model, epoch, test_sample):
    """Reconstruct `test_sample` through the VAE and plot a 4x4 image grid.

    Encodes the sample, reparameterizes to a latent z, decodes via
    model.sample, then saves the grid to image_at_epoch_XXXX.png and shows
    it inline. Assumes `test_sample` holds 16 images to fill the 4x4 grid.
    """
    mean, logvar = model.encode(test_sample)
    z = model.reparameterize(mean, logvar)
    predictions = model.sample(z)
    fig = plt.figure(figsize=(4, 4))
    for i in range(predictions.shape[0]):
        plt.subplot(4, 4, i + 1)
        plt.imshow(predictions[i, :, :, 0], cmap='gray')
        plt.axis('off')
    plt.savefig('image_at_epoch_{:04d}.png'.format(epoch))
    plt.show()
# Pick a sample of the test set for generating output images
# Take the first `num_examples_to_generate` images of the first test batch;
# the assert guarantees the batch is large enough.
assert batch_size >= num_examples_to_generate
for test_batch in conv_test_dataset.take(1):
    test_sample = test_batch[0:num_examples_to_generate, :, :, :]
import time
# NOTE(review): display/clear_output are imported but not used below.
from IPython.display import display, clear_output

# Baseline reconstruction grid before any training (epoch 0).
generate_and_save_images(model_1_1, 0, test_sample)

# Training loop
for epoch in range(1, epochs + 1):
    start_time = time.time()
    # Training step for each batch
    for train_x in conv_train_dataset:
        train_step(model_1_1, train_x, optimizer)
    end_time = time.time()
    # Evaluation on test dataset: fresh Mean metrics each epoch so the
    # reported averages cover only this epoch's test pass.
    reconstruction_loss_metric = tf.keras.metrics.Mean()
    kl_loss_metric = tf.keras.metrics.Mean()
    for test_x in conv_test_dataset:
        reconstruction_loss, kl_loss = compute_loss(model_1_1, test_x)
        reconstruction_loss_metric(reconstruction_loss)
        kl_loss_metric(kl_loss)
    avg_reconstruction_loss = reconstruction_loss_metric.result()
    avg_kl_loss = kl_loss_metric.result()
    print(f'Epoch: {epoch}, Reconstruction Loss: {avg_reconstruction_loss}, '
          f'KL Divergence Loss: {avg_kl_loss}, '
          f'Time elapsed for current epoch: {end_time - start_time}')
    # Visualize reconstructions of the fixed test_sample after each epoch.
    generate_and_save_images(model_1_1, epoch, test_sample)
Epoch: 1, Reconstruction Loss: 0.6334434747695923, KL Divergence Loss: 0.01612023077905178, Time elapsed for current epoch: 28.803677082061768
Epoch: 2, Reconstruction Loss: 0.6309695243835449, KL Divergence Loss: 0.017119478434324265, Time elapsed for current epoch: 26.24641728401184
Epoch: 3, Reconstruction Loss: 0.6292463541030884, KL Divergence Loss: 0.01873774640262127, Time elapsed for current epoch: 26.508427381515503
Epoch: 4, Reconstruction Loss: 0.6266998648643494, KL Divergence Loss: 0.020409733057022095, Time elapsed for current epoch: 26.727035522460938
Epoch: 5, Reconstruction Loss: 0.6299741268157959, KL Divergence Loss: 0.016690736636519432, Time elapsed for current epoch: 26.93620491027832
Epoch: 6, Reconstruction Loss: 0.6287509799003601, KL Divergence Loss: 0.018352270126342773, Time elapsed for current epoch: 27.09821653366089
Epoch: 7, Reconstruction Loss: 0.6279080510139465, KL Divergence Loss: 0.017702694982290268, Time elapsed for current epoch: 27.23372983932495
Epoch: 8, Reconstruction Loss: 0.6269193887710571, KL Divergence Loss: 0.019283313304185867, Time elapsed for current epoch: 27.251188039779663
Epoch: 9, Reconstruction Loss: 0.624289333820343, KL Divergence Loss: 0.01936059258878231, Time elapsed for current epoch: 27.388208627700806
Epoch: 10, Reconstruction Loss: 0.6233632564544678, KL Divergence Loss: 0.018310459330677986, Time elapsed for current epoch: 27.46054196357727
Epoch: 11, Reconstruction Loss: 0.6211088299751282, KL Divergence Loss: 0.01982598379254341, Time elapsed for current epoch: 27.531123638153076
Epoch: 12, Reconstruction Loss: 0.6212453842163086, KL Divergence Loss: 0.018754256889224052, Time elapsed for current epoch: 27.636709928512573
Epoch: 13, Reconstruction Loss: 0.6206406950950623, KL Divergence Loss: 0.01905824989080429, Time elapsed for current epoch: 27.766445636749268
Epoch: 14, Reconstruction Loss: 0.6194289326667786, KL Divergence Loss: 0.018689390271902084, Time elapsed for current epoch: 27.687664270401
Epoch: 15, Reconstruction Loss: 0.6194958090782166, KL Divergence Loss: 0.01832975074648857, Time elapsed for current epoch: 27.674500942230225
Epoch: 16, Reconstruction Loss: 0.6179108023643494, KL Divergence Loss: 0.019842205569148064, Time elapsed for current epoch: 27.733917474746704
Epoch: 17, Reconstruction Loss: 0.618786096572876, KL Divergence Loss: 0.01853414624929428, Time elapsed for current epoch: 27.700578927993774
Epoch: 18, Reconstruction Loss: 0.6187006235122681, KL Divergence Loss: 0.018690938130021095, Time elapsed for current epoch: 27.687626123428345
Epoch: 19, Reconstruction Loss: 0.6175908446311951, KL Divergence Loss: 0.019383559003472328, Time elapsed for current epoch: 27.696423053741455
Epoch: 20, Reconstruction Loss: 0.6166380643844604, KL Divergence Loss: 0.019516395404934883, Time elapsed for current epoch: 27.705793142318726
Epoch: 21, Reconstruction Loss: 0.6169512867927551, KL Divergence Loss: 0.01927036978304386, Time elapsed for current epoch: 27.70087504386902
Epoch: 22, Reconstruction Loss: 0.6171629428863525, KL Divergence Loss: 0.019747480750083923, Time elapsed for current epoch: 27.684107780456543
Epoch: 23, Reconstruction Loss: 0.6166470050811768, KL Divergence Loss: 0.019053135067224503, Time elapsed for current epoch: 27.737218618392944
Epoch: 24, Reconstruction Loss: 0.616320788860321, KL Divergence Loss: 0.019650986418128014, Time elapsed for current epoch: 27.695723056793213
Epoch: 25, Reconstruction Loss: 0.6166563630104065, KL Divergence Loss: 0.019474338740110397, Time elapsed for current epoch: 27.746983766555786
Epoch: 26, Reconstruction Loss: 0.6168226003646851, KL Divergence Loss: 0.01929137483239174, Time elapsed for current epoch: 27.727799892425537
Epoch: 27, Reconstruction Loss: 0.6151266098022461, KL Divergence Loss: 0.02011881396174431, Time elapsed for current epoch: 27.71352243423462
Epoch: 28, Reconstruction Loss: 0.6151819229125977, KL Divergence Loss: 0.02020837739109993, Time elapsed for current epoch: 27.718229293823242
Epoch: 29, Reconstruction Loss: 0.6147292852401733, KL Divergence Loss: 0.02050618827342987, Time elapsed for current epoch: 27.74167776107788
Epoch: 30, Reconstruction Loss: 0.6135488748550415, KL Divergence Loss: 0.020951738581061363, Time elapsed for current epoch: 27.697484254837036
Epoch: 31, Reconstruction Loss: 0.6145895719528198, KL Divergence Loss: 0.019928783178329468, Time elapsed for current epoch: 27.698636293411255
Epoch: 32, Reconstruction Loss: 0.6153436303138733, KL Divergence Loss: 0.019212322309613228, Time elapsed for current epoch: 27.701183557510376
Epoch: 33, Reconstruction Loss: 0.6138133406639099, KL Divergence Loss: 0.01972844824194908, Time elapsed for current epoch: 27.70382785797119
Epoch: 34, Reconstruction Loss: 0.614165186882019, KL Divergence Loss: 0.02069682441651821, Time elapsed for current epoch: 27.724525928497314
Epoch: 35, Reconstruction Loss: 0.6125558614730835, KL Divergence Loss: 0.021174488589167595, Time elapsed for current epoch: 27.695673942565918
Epoch: 36, Reconstruction Loss: 0.613172709941864, KL Divergence Loss: 0.01999358832836151, Time elapsed for current epoch: 27.729888439178467
Epoch: 37, Reconstruction Loss: 0.6121538877487183, KL Divergence Loss: 0.02156165800988674, Time elapsed for current epoch: 27.67930841445923
Epoch: 38, Reconstruction Loss: 0.6127501726150513, KL Divergence Loss: 0.020808173343539238, Time elapsed for current epoch: 27.71529197692871
Epoch: 39, Reconstruction Loss: 0.612686812877655, KL Divergence Loss: 0.02103133499622345, Time elapsed for current epoch: 27.712064743041992
Epoch: 40, Reconstruction Loss: 0.612557590007782, KL Divergence Loss: 0.020528394728899002, Time elapsed for current epoch: 27.688101530075073
Epoch: 41, Reconstruction Loss: 0.613667905330658, KL Divergence Loss: 0.019450655207037926, Time elapsed for current epoch: 27.697892665863037
Epoch: 42, Reconstruction Loss: 0.6120443940162659, KL Divergence Loss: 0.02093476615846157, Time elapsed for current epoch: 27.70711398124695
Epoch: 43, Reconstruction Loss: 0.6116068959236145, KL Divergence Loss: 0.02097741700708866, Time elapsed for current epoch: 27.71214199066162
Epoch: 44, Reconstruction Loss: 0.611534833908081, KL Divergence Loss: 0.021019022911787033, Time elapsed for current epoch: 27.693002223968506
Epoch: 45, Reconstruction Loss: 0.6122273206710815, KL Divergence Loss: 0.019906766712665558, Time elapsed for current epoch: 27.724704265594482
Epoch: 46, Reconstruction Loss: 0.6118555665016174, KL Divergence Loss: 0.020969757810235023, Time elapsed for current epoch: 27.703402519226074
Epoch: 47, Reconstruction Loss: 0.61060631275177, KL Divergence Loss: 0.02153928019106388, Time elapsed for current epoch: 27.70816445350647
Epoch: 48, Reconstruction Loss: 0.6105924844741821, KL Divergence Loss: 0.021583471447229385, Time elapsed for current epoch: 27.672622442245483
Epoch: 49, Reconstruction Loss: 0.6119565963745117, KL Divergence Loss: 0.02045971341431141, Time elapsed for current epoch: 27.694605350494385
Epoch: 50, Reconstruction Loss: 0.6123954057693481, KL Divergence Loss: 0.02030699886381626, Time elapsed for current epoch: 27.68712306022644
Epoch: 51, Reconstruction Loss: 0.6114264130592346, KL Divergence Loss: 0.02179470658302307, Time elapsed for current epoch: 27.72402000427246
Epoch: 52, Reconstruction Loss: 0.6127563118934631, KL Divergence Loss: 0.019605785608291626, Time elapsed for current epoch: 27.697824239730835
Epoch: 53, Reconstruction Loss: 0.6116140484809875, KL Divergence Loss: 0.0206968504935503, Time elapsed for current epoch: 27.710899829864502
Epoch: 54, Reconstruction Loss: 0.6115760207176208, KL Divergence Loss: 0.02025662362575531, Time elapsed for current epoch: 27.700254201889038
Epoch: 55, Reconstruction Loss: 0.611582338809967, KL Divergence Loss: 0.020929723978042603, Time elapsed for current epoch: 27.746068954467773
Epoch: 56, Reconstruction Loss: 0.6112327575683594, KL Divergence Loss: 0.02142840437591076, Time elapsed for current epoch: 27.721177339553833
Epoch: 57, Reconstruction Loss: 0.6126358509063721, KL Divergence Loss: 0.02012498490512371, Time elapsed for current epoch: 27.72334098815918
Epoch: 58, Reconstruction Loss: 0.6114612817764282, KL Divergence Loss: 0.020849471911787987, Time elapsed for current epoch: 27.75596570968628
Epoch: 59, Reconstruction Loss: 0.6106481552124023, KL Divergence Loss: 0.021474307402968407, Time elapsed for current epoch: 27.757460117340088
Epoch: 60, Reconstruction Loss: 0.611725926399231, KL Divergence Loss: 0.020694365724921227, Time elapsed for current epoch: 27.760886192321777
Epoch: 61, Reconstruction Loss: 0.6121079325675964, KL Divergence Loss: 0.02095578797161579, Time elapsed for current epoch: 27.87461543083191
Epoch: 62, Reconstruction Loss: 0.6117832660675049, KL Divergence Loss: 0.02040698379278183, Time elapsed for current epoch: 27.803385019302368
Epoch: 63, Reconstruction Loss: 0.611717939376831, KL Divergence Loss: 0.0214700885117054, Time elapsed for current epoch: 27.80099320411682
Epoch: 64, Reconstruction Loss: 0.6102436184883118, KL Divergence Loss: 0.021586818620562553, Time elapsed for current epoch: 27.79105019569397
Epoch: 65, Reconstruction Loss: 0.6104190349578857, KL Divergence Loss: 0.021997367963194847, Time elapsed for current epoch: 27.846931219100952
Epoch: 66, Reconstruction Loss: 0.6101277470588684, KL Divergence Loss: 0.021739432588219643, Time elapsed for current epoch: 27.835740327835083
Epoch: 67, Reconstruction Loss: 0.6112824082374573, KL Divergence Loss: 0.020138347521424294, Time elapsed for current epoch: 27.767496347427368
Epoch: 68, Reconstruction Loss: 0.61024409532547, KL Divergence Loss: 0.021153850480914116, Time elapsed for current epoch: 27.79365611076355
Epoch: 69, Reconstruction Loss: 0.6109185218811035, KL Divergence Loss: 0.020944762974977493, Time elapsed for current epoch: 27.812073945999146
Epoch: 70, Reconstruction Loss: 0.6108502149581909, KL Divergence Loss: 0.020764004439115524, Time elapsed for current epoch: 27.84951615333557
Epoch: 71, Reconstruction Loss: 0.6099850535392761, KL Divergence Loss: 0.021462300792336464, Time elapsed for current epoch: 27.833810806274414
Epoch: 72, Reconstruction Loss: 0.6107934713363647, KL Divergence Loss: 0.021508021280169487, Time elapsed for current epoch: 27.834917545318604
Epoch: 73, Reconstruction Loss: 0.6102215647697449, KL Divergence Loss: 0.021136287599802017, Time elapsed for current epoch: 27.822112798690796
Epoch: 74, Reconstruction Loss: 0.6106767654418945, KL Divergence Loss: 0.021333100274205208, Time elapsed for current epoch: 27.83943200111389
Epoch: 75, Reconstruction Loss: 0.6110193729400635, KL Divergence Loss: 0.021220846101641655, Time elapsed for current epoch: 27.834189891815186
Epoch: 76, Reconstruction Loss: 0.6105163097381592, KL Divergence Loss: 0.02078898996114731, Time elapsed for current epoch: 27.788784980773926
Epoch: 77, Reconstruction Loss: 0.6116193532943726, KL Divergence Loss: 0.021188434213399887, Time elapsed for current epoch: 27.8258855342865
Epoch: 78, Reconstruction Loss: 0.6111419200897217, KL Divergence Loss: 0.02067631483078003, Time elapsed for current epoch: 27.82484221458435
Epoch: 79, Reconstruction Loss: 0.6103665232658386, KL Divergence Loss: 0.021312035620212555, Time elapsed for current epoch: 27.810315132141113
Epoch: 80, Reconstruction Loss: 0.6100727915763855, KL Divergence Loss: 0.021259335801005363, Time elapsed for current epoch: 27.83662486076355
Epoch: 81, Reconstruction Loss: 0.6112625002861023, KL Divergence Loss: 0.02115064486861229, Time elapsed for current epoch: 27.803844690322876
Epoch: 82, Reconstruction Loss: 0.6109524965286255, KL Divergence Loss: 0.02100122533738613, Time elapsed for current epoch: 27.816694021224976
Epoch: 83, Reconstruction Loss: 0.6110220551490784, KL Divergence Loss: 0.021403037011623383, Time elapsed for current epoch: 27.78225350379944
Epoch: 84, Reconstruction Loss: 0.6109353303909302, KL Divergence Loss: 0.020637547597289085, Time elapsed for current epoch: 27.764026641845703
Epoch: 85, Reconstruction Loss: 0.6103747487068176, KL Divergence Loss: 0.021454256027936935, Time elapsed for current epoch: 27.834697008132935
Epoch: 86, Reconstruction Loss: 0.6101019978523254, KL Divergence Loss: 0.021461879834532738, Time elapsed for current epoch: 27.814507961273193
Epoch: 87, Reconstruction Loss: 0.6107648015022278, KL Divergence Loss: 0.021790482103824615, Time elapsed for current epoch: 27.82379412651062
Epoch: 88, Reconstruction Loss: 0.6096268892288208, KL Divergence Loss: 0.02173497900366783, Time elapsed for current epoch: 27.845579147338867
Epoch: 89, Reconstruction Loss: 0.6103676557540894, KL Divergence Loss: 0.021191364154219627, Time elapsed for current epoch: 27.82938504219055
Epoch: 90, Reconstruction Loss: 0.610327959060669, KL Divergence Loss: 0.02111044153571129, Time elapsed for current epoch: 27.784464359283447
Epoch: 91, Reconstruction Loss: 0.6115627884864807, KL Divergence Loss: 0.020216401666402817, Time elapsed for current epoch: 27.754796743392944
Epoch: 92, Reconstruction Loss: 0.6110372543334961, KL Divergence Loss: 0.0204300656914711, Time elapsed for current epoch: 27.836215496063232
Epoch: 93, Reconstruction Loss: 0.6105143427848816, KL Divergence Loss: 0.020734887570142746, Time elapsed for current epoch: 27.822433948516846
Epoch: 94, Reconstruction Loss: 0.611168622970581, KL Divergence Loss: 0.020988253876566887, Time elapsed for current epoch: 27.849552869796753
Epoch: 95, Reconstruction Loss: 0.6113207340240479, KL Divergence Loss: 0.02094285376369953, Time elapsed for current epoch: 27.82696557044983
Epoch: 96, Reconstruction Loss: 0.6109811067581177, KL Divergence Loss: 0.02126327157020569, Time elapsed for current epoch: 27.827494144439697
Epoch: 97, Reconstruction Loss: 0.6108459830284119, KL Divergence Loss: 0.020505109801888466, Time elapsed for current epoch: 27.810203552246094
Epoch: 98, Reconstruction Loss: 0.6107162833213806, KL Divergence Loss: 0.020713476464152336, Time elapsed for current epoch: 27.819000005722046
Epoch: 99, Reconstruction Loss: 0.6106010675430298, KL Divergence Loss: 0.02219085954129696, Time elapsed for current epoch: 27.79438829421997
Epoch: 100, Reconstruction Loss: 0.6099579334259033, KL Divergence Loss: 0.021888166666030884, Time elapsed for current epoch: 27.797505140304565
Epoch: 101, Reconstruction Loss: 0.6103699803352356, KL Divergence Loss: 0.02143835835158825, Time elapsed for current epoch: 27.755250215530396
Epoch: 102, Reconstruction Loss: 0.6107051372528076, KL Divergence Loss: 0.020835265517234802, Time elapsed for current epoch: 27.760326862335205
Epoch: 103, Reconstruction Loss: 0.6107614636421204, KL Divergence Loss: 0.020635973662137985, Time elapsed for current epoch: 27.734230518341064
Epoch: 104, Reconstruction Loss: 0.6093379259109497, KL Divergence Loss: 0.022365372627973557, Time elapsed for current epoch: 27.762243270874023
Epoch: 105, Reconstruction Loss: 0.6104241013526917, KL Divergence Loss: 0.02126396633684635, Time elapsed for current epoch: 27.72120475769043
Epoch: 106, Reconstruction Loss: 0.6097298264503479, KL Divergence Loss: 0.02183590456843376, Time elapsed for current epoch: 27.722075939178467
Epoch: 107, Reconstruction Loss: 0.6108608841896057, KL Divergence Loss: 0.020088618621230125, Time elapsed for current epoch: 27.72103214263916
Epoch: 108, Reconstruction Loss: 0.6111891865730286, KL Divergence Loss: 0.020477941259741783, Time elapsed for current epoch: 27.739749908447266
Epoch: 109, Reconstruction Loss: 0.6107543110847473, KL Divergence Loss: 0.02104366198182106, Time elapsed for current epoch: 27.705388069152832
Epoch: 110, Reconstruction Loss: 0.6109455823898315, KL Divergence Loss: 0.020561689510941505, Time elapsed for current epoch: 27.710184812545776
Epoch: 111, Reconstruction Loss: 0.6103914380073547, KL Divergence Loss: 0.02170729823410511, Time elapsed for current epoch: 27.748387098312378
Epoch: 112, Reconstruction Loss: 0.6096560955047607, KL Divergence Loss: 0.021684646606445312, Time elapsed for current epoch: 27.724509954452515
Epoch: 113, Reconstruction Loss: 0.6108221411705017, KL Divergence Loss: 0.02085893787443638, Time elapsed for current epoch: 27.736307621002197
Epoch: 114, Reconstruction Loss: 0.6116098165512085, KL Divergence Loss: 0.020360520109534264, Time elapsed for current epoch: 27.729079008102417
Epoch: 115, Reconstruction Loss: 0.6095576286315918, KL Divergence Loss: 0.02192881517112255, Time elapsed for current epoch: 27.733633041381836
Epoch: 116, Reconstruction Loss: 0.6096834540367126, KL Divergence Loss: 0.021766213700175285, Time elapsed for current epoch: 27.731425285339355
Epoch: 117, Reconstruction Loss: 0.6096188426017761, KL Divergence Loss: 0.02108873799443245, Time elapsed for current epoch: 27.74028730392456
Epoch: 118, Reconstruction Loss: 0.6105811595916748, KL Divergence Loss: 0.021587397903203964, Time elapsed for current epoch: 27.718379020690918
Epoch: 119, Reconstruction Loss: 0.6108219623565674, KL Divergence Loss: 0.021108251065015793, Time elapsed for current epoch: 27.74603772163391
Epoch: 120, Reconstruction Loss: 0.6101693511009216, KL Divergence Loss: 0.021021364256739616, Time elapsed for current epoch: 27.710760354995728
Epoch: 121, Reconstruction Loss: 0.6097108721733093, KL Divergence Loss: 0.021716071292757988, Time elapsed for current epoch: 27.678985118865967
Epoch: 122, Reconstruction Loss: 0.6103003621101379, KL Divergence Loss: 0.02101094461977482, Time elapsed for current epoch: 27.707064390182495
Epoch: 123, Reconstruction Loss: 0.6087931990623474, KL Divergence Loss: 0.02218671515583992, Time elapsed for current epoch: 27.70934247970581
Epoch: 124, Reconstruction Loss: 0.6091229319572449, KL Divergence Loss: 0.022170191630721092, Time elapsed for current epoch: 27.726712703704834
Epoch: 125, Reconstruction Loss: 0.6090856790542603, KL Divergence Loss: 0.021927334368228912, Time elapsed for current epoch: 27.73672842979431
Epoch: 126, Reconstruction Loss: 0.609094500541687, KL Divergence Loss: 0.021797027438879013, Time elapsed for current epoch: 27.734150409698486
Epoch: 127, Reconstruction Loss: 0.6087789535522461, KL Divergence Loss: 0.022403329610824585, Time elapsed for current epoch: 27.737274169921875
Epoch: 128, Reconstruction Loss: 0.6099382042884827, KL Divergence Loss: 0.021150190383195877, Time elapsed for current epoch: 27.705924034118652
Epoch: 129, Reconstruction Loss: 0.6101565361022949, KL Divergence Loss: 0.020763399079442024, Time elapsed for current epoch: 27.720948219299316
Epoch: 130, Reconstruction Loss: 0.608797013759613, KL Divergence Loss: 0.021893812343478203, Time elapsed for current epoch: 27.729616165161133
Epoch: 131, Reconstruction Loss: 0.6095482707023621, KL Divergence Loss: 0.02147282101213932, Time elapsed for current epoch: 27.707898378372192
Epoch: 132, Reconstruction Loss: 0.6105772256851196, KL Divergence Loss: 0.020662041381001472, Time elapsed for current epoch: 27.727611780166626
Epoch: 133, Reconstruction Loss: 0.6097767353057861, KL Divergence Loss: 0.02119467966258526, Time elapsed for current epoch: 27.714409112930298
Epoch: 134, Reconstruction Loss: 0.6093569397926331, KL Divergence Loss: 0.021664435043931007, Time elapsed for current epoch: 27.710995197296143
Epoch: 135, Reconstruction Loss: 0.6096532940864563, KL Divergence Loss: 0.021124672144651413, Time elapsed for current epoch: 27.711761951446533
Epoch: 136, Reconstruction Loss: 0.6096300482749939, KL Divergence Loss: 0.021263200789690018, Time elapsed for current epoch: 27.709293842315674
Epoch: 137, Reconstruction Loss: 0.610166609287262, KL Divergence Loss: 0.02140236832201481, Time elapsed for current epoch: 27.704522609710693
Epoch: 138, Reconstruction Loss: 0.609679102897644, KL Divergence Loss: 0.021509887650609016, Time elapsed for current epoch: 27.676846504211426
Epoch: 139, Reconstruction Loss: 0.6105002760887146, KL Divergence Loss: 0.02075820229947567, Time elapsed for current epoch: 27.72706913948059
Epoch: 140, Reconstruction Loss: 0.6099023818969727, KL Divergence Loss: 0.020984629169106483, Time elapsed for current epoch: 27.72289776802063
Epoch: 141, Reconstruction Loss: 0.609929084777832, KL Divergence Loss: 0.021158801391720772, Time elapsed for current epoch: 27.716670751571655
Epoch: 142, Reconstruction Loss: 0.6089475154876709, KL Divergence Loss: 0.022244960069656372, Time elapsed for current epoch: 27.7193603515625
Epoch: 143, Reconstruction Loss: 0.6103587746620178, KL Divergence Loss: 0.02026842162013054, Time elapsed for current epoch: 27.7122483253479
Epoch: 144, Reconstruction Loss: 0.6102736592292786, KL Divergence Loss: 0.021544279530644417, Time elapsed for current epoch: 27.699305534362793
Epoch: 145, Reconstruction Loss: 0.6093152761459351, KL Divergence Loss: 0.021561624482274055, Time elapsed for current epoch: 27.710375785827637
Epoch: 146, Reconstruction Loss: 0.6093712449073792, KL Divergence Loss: 0.022172091528773308, Time elapsed for current epoch: 27.72566866874695
Epoch: 147, Reconstruction Loss: 0.6087194085121155, KL Divergence Loss: 0.022602718323469162, Time elapsed for current epoch: 27.711580514907837
Epoch: 148, Reconstruction Loss: 0.6097513437271118, KL Divergence Loss: 0.020941108465194702, Time elapsed for current epoch: 27.736794471740723
Epoch: 149, Reconstruction Loss: 0.6104096174240112, KL Divergence Loss: 0.02056087926030159, Time elapsed for current epoch: 27.706080675125122
Epoch: 150, Reconstruction Loss: 0.6086795330047607, KL Divergence Loss: 0.021791817620396614, Time elapsed for current epoch: 27.746439695358276
Epoch: 151, Reconstruction Loss: 0.6090266108512878, KL Divergence Loss: 0.02173059992492199, Time elapsed for current epoch: 27.722538232803345
Epoch: 152, Reconstruction Loss: 0.6098101139068604, KL Divergence Loss: 0.021377116441726685, Time elapsed for current epoch: 27.740812063217163
Epoch: 153, Reconstruction Loss: 0.6092432141304016, KL Divergence Loss: 0.021882258355617523, Time elapsed for current epoch: 27.711708545684814
Epoch: 154, Reconstruction Loss: 0.6084659099578857, KL Divergence Loss: 0.02233508974313736, Time elapsed for current epoch: 27.718048334121704
Epoch: 155, Reconstruction Loss: 0.6099408268928528, KL Divergence Loss: 0.02069762349128723, Time elapsed for current epoch: 27.738806009292603
Epoch: 156, Reconstruction Loss: 0.6095951199531555, KL Divergence Loss: 0.021627722308039665, Time elapsed for current epoch: 27.73220920562744
Epoch: 157, Reconstruction Loss: 0.6099178194999695, KL Divergence Loss: 0.02144634537398815, Time elapsed for current epoch: 27.73341989517212
Epoch: 158, Reconstruction Loss: 0.6098973155021667, KL Divergence Loss: 0.021272970363497734, Time elapsed for current epoch: 27.7201988697052
Epoch: 159, Reconstruction Loss: 0.6082521677017212, KL Divergence Loss: 0.02254701405763626, Time elapsed for current epoch: 27.708404064178467
Epoch: 160, Reconstruction Loss: 0.6105753183364868, KL Divergence Loss: 0.0209126565605402, Time elapsed for current epoch: 27.728146076202393
Epoch: 161, Reconstruction Loss: 0.609994113445282, KL Divergence Loss: 0.021636731922626495, Time elapsed for current epoch: 27.75161862373352
Epoch: 162, Reconstruction Loss: 0.6093988418579102, KL Divergence Loss: 0.02162826992571354, Time elapsed for current epoch: 27.730151891708374
Epoch: 163, Reconstruction Loss: 0.6101583242416382, KL Divergence Loss: 0.020983150228857994, Time elapsed for current epoch: 27.733673334121704
Epoch: 164, Reconstruction Loss: 0.6096220016479492, KL Divergence Loss: 0.021251162514090538, Time elapsed for current epoch: 27.768309354782104
Epoch: 165, Reconstruction Loss: 0.6084285378456116, KL Divergence Loss: 0.0225917287170887, Time elapsed for current epoch: 27.716981172561646
Epoch: 166, Reconstruction Loss: 0.6094915866851807, KL Divergence Loss: 0.021613268181681633, Time elapsed for current epoch: 27.76215934753418
Epoch: 167, Reconstruction Loss: 0.6092656850814819, KL Divergence Loss: 0.02167339064180851, Time elapsed for current epoch: 27.747535705566406
Epoch: 168, Reconstruction Loss: 0.6099026799201965, KL Divergence Loss: 0.021664787083864212, Time elapsed for current epoch: 27.74583888053894
Epoch: 169, Reconstruction Loss: 0.6100641489028931, KL Divergence Loss: 0.020796122029423714, Time elapsed for current epoch: 27.720085382461548
Epoch: 170, Reconstruction Loss: 0.6084248423576355, KL Divergence Loss: 0.022599894553422928, Time elapsed for current epoch: 27.74331784248352
Epoch: 171, Reconstruction Loss: 0.6081529855728149, KL Divergence Loss: 0.022827764973044395, Time elapsed for current epoch: 27.691467761993408
Epoch: 172, Reconstruction Loss: 0.6093263030052185, KL Divergence Loss: 0.021089056506752968, Time elapsed for current epoch: 27.737348079681396
Epoch: 173, Reconstruction Loss: 0.6106012463569641, KL Divergence Loss: 0.02025214582681656, Time elapsed for current epoch: 27.75941824913025
Epoch: 174, Reconstruction Loss: 0.6091287136077881, KL Divergence Loss: 0.021812623366713524, Time elapsed for current epoch: 27.766566038131714
Epoch: 175, Reconstruction Loss: 0.6089884042739868, KL Divergence Loss: 0.021593093872070312, Time elapsed for current epoch: 27.71699070930481
Epoch: 176, Reconstruction Loss: 0.6087821125984192, KL Divergence Loss: 0.02175004966557026, Time elapsed for current epoch: 27.824376583099365
Epoch: 177, Reconstruction Loss: 0.6091652512550354, KL Divergence Loss: 0.021635005250573158, Time elapsed for current epoch: 27.841633319854736
Epoch: 178, Reconstruction Loss: 0.6100217700004578, KL Divergence Loss: 0.021147318184375763, Time elapsed for current epoch: 27.782075881958008
Epoch: 179, Reconstruction Loss: 0.6079843640327454, KL Divergence Loss: 0.022755881771445274, Time elapsed for current epoch: 27.81978678703308
Epoch: 180, Reconstruction Loss: 0.6100398302078247, KL Divergence Loss: 0.02069314941763878, Time elapsed for current epoch: 27.8338041305542
Epoch: 181, Reconstruction Loss: 0.6091465353965759, KL Divergence Loss: 0.021774135529994965, Time elapsed for current epoch: 27.82646417617798
Epoch: 182, Reconstruction Loss: 0.608263373374939, KL Divergence Loss: 0.022854553535580635, Time elapsed for current epoch: 27.832812547683716
Epoch: 183, Reconstruction Loss: 0.6097789406776428, KL Divergence Loss: 0.021071769297122955, Time elapsed for current epoch: 27.829007387161255
Epoch: 184, Reconstruction Loss: 0.6084601283073425, KL Divergence Loss: 0.022206272929906845, Time elapsed for current epoch: 27.804168462753296
Epoch: 185, Reconstruction Loss: 0.609559953212738, KL Divergence Loss: 0.021250005811452866, Time elapsed for current epoch: 27.792102575302124
Epoch: 186, Reconstruction Loss: 0.6088175773620605, KL Divergence Loss: 0.02230818383395672, Time elapsed for current epoch: 27.82693576812744
Epoch: 187, Reconstruction Loss: 0.6080158948898315, KL Divergence Loss: 0.022743545472621918, Time elapsed for current epoch: 27.795355319976807
Epoch: 188, Reconstruction Loss: 0.6100849509239197, KL Divergence Loss: 0.021774226799607277, Time elapsed for current epoch: 27.811322689056396
Epoch: 189, Reconstruction Loss: 0.6092031598091125, KL Divergence Loss: 0.021385880187153816, Time elapsed for current epoch: 27.853959560394287
Epoch: 190, Reconstruction Loss: 0.6085416078567505, KL Divergence Loss: 0.022565053775906563, Time elapsed for current epoch: 27.778843879699707
Epoch: 191, Reconstruction Loss: 0.6080276370048523, KL Divergence Loss: 0.02287277765572071, Time elapsed for current epoch: 27.809967041015625
Epoch: 192, Reconstruction Loss: 0.6090016961097717, KL Divergence Loss: 0.02174062468111515, Time elapsed for current epoch: 27.76979088783264
Epoch: 193, Reconstruction Loss: 0.6090512275695801, KL Divergence Loss: 0.02191600389778614, Time elapsed for current epoch: 27.75339698791504
Epoch: 194, Reconstruction Loss: 0.6097329258918762, KL Divergence Loss: 0.02084084413945675, Time elapsed for current epoch: 27.771214962005615
Epoch: 195, Reconstruction Loss: 0.6096522808074951, KL Divergence Loss: 0.021240055561065674, Time elapsed for current epoch: 27.810447216033936
Epoch: 196, Reconstruction Loss: 0.6096938848495483, KL Divergence Loss: 0.021225260570645332, Time elapsed for current epoch: 27.848260641098022
Epoch: 197, Reconstruction Loss: 0.6087343692779541, KL Divergence Loss: 0.022032737731933594, Time elapsed for current epoch: 27.82468295097351
Epoch: 198, Reconstruction Loss: 0.6095322370529175, KL Divergence Loss: 0.02107287384569645, Time elapsed for current epoch: 27.79282855987549
Epoch: 199, Reconstruction Loss: 0.6086061596870422, KL Divergence Loss: 0.022710435092449188, Time elapsed for current epoch: 27.80184841156006
Epoch: 200, Reconstruction Loss: 0.6085320711135864, KL Divergence Loss: 0.022177759557962418, Time elapsed for current epoch: 27.857168912887573
# Persist the trained CVAE components as separate artifacts so the encoder
# and decoder can later be reloaded and used independently.
# NOTE(review): saving a full model to legacy HDF5 (.h5) generally requires
# the model to implement get_config(); if model_1_1 is a subclassed
# tf.keras.Model (as the custom training loop above suggests) this save may
# fail or drop the architecture — the native '.keras' format is the
# recommended target. TODO: confirm how these files are reloaded downstream.
# Save the encoder
model_1_1.encoder.save('encoder.h5')
# Save the decoder
model_1_1.decoder.save('decoder.h5')
model_1_1.save('cvae_model.h5')
# Simple liveness check that the kernel is still responsive after the saves.
print("hello")
hello
# Save only the learned parameters (no architecture/optimizer state).
# Keras 3 requires the filename to end in '.weights.h5' for save_weights().
model_1_1.save_weights('model_weights.weights.h5')
# List the Kaggle working directory to verify the artifacts were written.
os.listdir("/kaggle/working")
['image_at_epoch_0158.png', 'image_at_epoch_0171.png', 'image_at_epoch_0135.png', 'image_at_epoch_0034.png', 'image_at_epoch_0039.png', 'image_at_epoch_0139.png', 'image_at_epoch_0055.png', 'image_at_epoch_0116.png', 'image_at_epoch_0105.png', 'image_at_epoch_0199.png', 'image_at_epoch_0001.png', 'image_at_epoch_0146.png', 'image_at_epoch_0069.png', 'image_at_epoch_0037.png', 'image_at_epoch_0161.png', 'image_at_epoch_0056.png', 'image_at_epoch_0169.png', 'image_at_epoch_0075.png', 'image_at_epoch_0009.png', 'image_at_epoch_0053.png', 'image_at_epoch_0129.png', 'image_at_epoch_0182.png', 'image_at_epoch_0141.png', 'image_at_epoch_0030.png', 'image_at_epoch_0189.png', 'image_at_epoch_0160.png', 'image_at_epoch_0137.png', 'image_at_epoch_0166.png', 'image_at_epoch_0122.png', 'image_at_epoch_0168.png', 'image_at_epoch_0130.png', 'image_at_epoch_0090.png', 'image_at_epoch_0072.png', 'image_at_epoch_0163.png', 'image_at_epoch_0108.png', 'image_at_epoch_0142.png', 'image_at_epoch_0085.png', 'image_at_epoch_0103.png', 'image_at_epoch_0021.png', 'image_at_epoch_0179.png', 'image_at_epoch_0017.png', 'cvae_model.h5', 'image_at_epoch_0147.png', 'model_weights.weights.h5', 'image_at_epoch_0052.png', 'image_at_epoch_0143.png', 'image_at_epoch_0081.png', 'image_at_epoch_0012.png', 'image_at_epoch_0074.png', 'image_at_epoch_0049.png', 'image_at_epoch_0080.png', 'image_at_epoch_0102.png', 'image_at_epoch_0022.png', 'image_at_epoch_0149.png', 'image_at_epoch_0124.png', 'image_at_epoch_0063.png', 'image_at_epoch_0070.png', 'image_at_epoch_0172.png', 'image_at_epoch_0007.png', 'image_at_epoch_0065.png', 'image_at_epoch_0104.png', 'image_at_epoch_0145.png', 'image_at_epoch_0096.png', 'image_at_epoch_0015.png', 'image_at_epoch_0046.png', 'image_at_epoch_0109.png', 'decoder.h5', 'image_at_epoch_0011.png', 'image_at_epoch_0043.png', 'image_at_epoch_0134.png', 'image_at_epoch_0113.png', 'image_at_epoch_0140.png', 'image_at_epoch_0167.png', 'image_at_epoch_0047.png', 
'image_at_epoch_0138.png', 'image_at_epoch_0068.png', 'image_at_epoch_0008.png', 'image_at_epoch_0095.png', 'image_at_epoch_0112.png', 'image_at_epoch_0107.png', 'image_at_epoch_0117.png', 'image_at_epoch_0195.png', 'image_at_epoch_0118.png', 'image_at_epoch_0150.png', 'image_at_epoch_0025.png', 'image_at_epoch_0100.png', 'image_at_epoch_0014.png', 'image_at_epoch_0180.png', 'image_at_epoch_0035.png', 'image_at_epoch_0153.png', 'image_at_epoch_0165.png', 'image_at_epoch_0088.png', 'image_at_epoch_0154.png', 'image_at_epoch_0191.png', 'image_at_epoch_0029.png', 'image_at_epoch_0192.png', 'image_at_epoch_0196.png', 'image_at_epoch_0156.png', 'image_at_epoch_0170.png', 'image_at_epoch_0066.png', 'image_at_epoch_0003.png', 'image_at_epoch_0023.png', 'image_at_epoch_0152.png', 'image_at_epoch_0024.png', 'image_at_epoch_0059.png', 'image_at_epoch_0119.png', 'image_at_epoch_0184.png', 'image_at_epoch_0181.png', 'image_at_epoch_0061.png', 'image_at_epoch_0071.png', 'image_at_epoch_0020.png', 'image_at_epoch_0031.png', 'image_at_epoch_0086.png', 'image_at_epoch_0018.png', 'image_at_epoch_0078.png', 'image_at_epoch_0190.png', 'image_at_epoch_0151.png', 'image_at_epoch_0044.png', 'image_at_epoch_0123.png', 'image_at_epoch_0133.png', 'image_at_epoch_0178.png', 'image_at_epoch_0159.png', 'image_at_epoch_0041.png', 'image_at_epoch_0073.png', 'image_at_epoch_0045.png', 'image_at_epoch_0111.png', 'image_at_epoch_0058.png', 'image_at_epoch_0136.png', 'image_at_epoch_0120.png', 'image_at_epoch_0148.png', 'image_at_epoch_0194.png', 'image_at_epoch_0128.png', 'image_at_epoch_0198.png', 'image_at_epoch_0186.png', 'image_at_epoch_0036.png', 'image_at_epoch_0016.png', 'image_at_epoch_0038.png', 'image_at_epoch_0125.png', 'image_at_epoch_0082.png', 'image_at_epoch_0174.png', 'image_at_epoch_0127.png', 'image_at_epoch_0131.png', 'image_at_epoch_0054.png', 'image_at_epoch_0019.png', 'image_at_epoch_0115.png', 'image_at_epoch_0099.png', 'image_at_epoch_0098.png', 'image_at_epoch_0183.png', 
'image_at_epoch_0157.png', 'image_at_epoch_0087.png', 'image_at_epoch_0084.png', 'image_at_epoch_0175.png', 'image_at_epoch_0177.png', 'image_at_epoch_0083.png', 'encoder.h5', 'image_at_epoch_0067.png', 'image_at_epoch_0097.png', 'image_at_epoch_0110.png', 'image_at_epoch_0106.png', 'image_at_epoch_0062.png', 'image_at_epoch_0200.png', 'image_at_epoch_0132.png', 'image_at_epoch_0162.png', 'image_at_epoch_0026.png', 'image_at_epoch_0121.png', 'image_at_epoch_0094.png', 'image_at_epoch_0197.png', 'image_at_epoch_0092.png', 'image_at_epoch_0032.png', 'image_at_epoch_0027.png', 'image_at_epoch_0188.png', 'image_at_epoch_0010.png', 'image_at_epoch_0079.png', 'image_at_epoch_0048.png', 'image_at_epoch_0042.png', 'image_at_epoch_0176.png', 'image_at_epoch_0051.png', 'image_at_epoch_0040.png', 'image_at_epoch_0155.png', 'image_at_epoch_0060.png', 'image_at_epoch_0164.png', 'image_at_epoch_0076.png', '.virtual_documents', 'image_at_epoch_0114.png', 'image_at_epoch_0101.png', 'image_at_epoch_0093.png', 'image_at_epoch_0193.png', 'image_at_epoch_0185.png', 'image_at_epoch_0144.png', 'image_at_epoch_0173.png', 'image_at_epoch_0006.png', 'image_at_epoch_0077.png', 'image_at_epoch_0089.png', 'image_at_epoch_0028.png', 'image_at_epoch_0033.png', 'image_at_epoch_0064.png', 'image_at_epoch_0002.png', 'image_at_epoch_0050.png', 'image_at_epoch_0057.png', 'image_at_epoch_0091.png', 'image_at_epoch_0126.png', 'image_at_epoch_0005.png', 'image_at_epoch_0000.png', 'image_at_epoch_0004.png', 'image_at_epoch_0013.png', 'image_at_epoch_0187.png']
import os

# Report the notebook's current working directory
# (on Kaggle this is /kaggle/working).
current_dir = os.getcwd()
print(current_dir)
/kaggle/working
# Create a subdirectory for an additional copy of the weights;
# exist_ok=True makes this safe to re-run.
os.makedirs('subdirectory_name', exist_ok=True)
model_1_1.save_weights('subdirectory_name/model_weights.weights.h5')
# NOTE(review): this re-saves the same weights to the top-level path already
# written earlier (L507), overwriting that file with identical content —
# presumably just to guarantee both copies exist; confirm both are needed.
model_1_1.save_weights('/kaggle/working/model_weights.weights.h5')
from IPython.display import FileLink

# Generate a clickable download link for the saved weights file.
# (FileLink only renders as a link when it is the last expression of a
# notebook cell.)
# Fix: removed a stray bare `p` that followed this call — no variable `p`
# is defined in scope, so evaluating it would raise NameError.
FileLink('/kaggle/working/subdirectory_name/model_weights.weights.h5')